In [1]:
%reload_ext autoreload
%autoreload 2
import sys
sys.path.append('..')
from helper import nn
from helper import logistic_regression as lr
import numpy as np
In [2]:
X_raw, y_raw = nn.load_data('ex4data1.mat', transpose=False)
X = np.insert(X_raw, 0, np.ones(X_raw.shape[0]), axis=1)
X.shape
Out[2]:
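For reference, a minimal sketch of what a loader like nn.load_data could look like, assuming ex4data1.mat uses the exercise's standard keys 'X' and 'y'; the body here is illustrative, not the repo's actual helper. The transpose flag would reorder MATLAB's column-major pixels so the digit images display upright.

import numpy as np
import scipy.io as sio

def load_data(path, transpose=True):
    data = sio.loadmat(path)
    y = data.get('y').reshape(-1)  # (5000,) labels, values 1..10
    X = data.get('X')              # (5000, 400) flattened 20x20 images
    if transpose:
        # MATLAB stores each image column-major; transpose to display upright
        X = np.array([im.reshape(20, 20).T.reshape(400) for im in X])
    return X, y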
In [3]:
y_raw
Out[3]:
In [4]:
y = nn.expand_y(y_raw)
y
Out[4]:
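A minimal sketch of that one-hot expansion, assuming the labels run from 1 to 10 (10 stands for the digit 0 in this dataset), so label k becomes a 10-dim vector with a 1 at index k-1:

import numpy as np

def expand_y(y):
    # e.g. 1 -> [1,0,...,0], 10 -> [0,...,0,1]
    res = np.zeros((y.shape[0], 10))
    res[np.arange(y.shape[0]), y - 1] = 1
    return res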
In [5]:
t1, t2 = nn.load_weight('ex4weights.mat')
t1.shape, t2.shape
Out[5]:
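A sketch of the weight loader, assuming ex4weights.mat stores the pre-trained matrices under the keys 'Theta1' and 'Theta2' as in the original exercise:

import scipy.io as sio

def load_weight(path):
    data = sio.loadmat(path)
    return data['Theta1'], data['Theta2']  # (25, 401) and (10, 26)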
In [6]:
theta = nn.serialize(t1, t2) # flatten params
theta.shape
Out[6]:
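Flattening matters because scipy-style optimizers expect a single 1-D parameter vector. A sketch of both directions, with the inverse hard-coded to this network's assumed (25, 401) / (10, 26) shapes:

import numpy as np

def serialize(a, b):
    # concatenate both weight matrices into one (25*401 + 10*26,) = (10285,) vector
    return np.concatenate((a.ravel(), b.ravel()))

def deserialize(seq):
    # undo serialize: split and reshape back into the two weight matrices
    return seq[:25 * 401].reshape(25, 401), seq[25 * 401:].reshape(10, 26)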
In [7]:
_, _, _, _, h = nn.feed_forward(theta, X)
h # 5000*10
Out[7]:
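A sketch of the forward pass, consistent with the five values unpacked above (a1, z2, a2, z3, h) and assuming sigmoid activations in both layers (deserialize as sketched earlier):

import numpy as np

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

def feed_forward(theta, X):
    t1, t2 = deserialize(theta)                # (25, 401), (10, 26)
    a1 = X                                     # (5000, 401), bias column already inserted
    z2 = a1 @ t1.T                             # (5000, 25)
    a2 = np.insert(sigmoid(z2), 0, 1, axis=1)  # (5000, 26), prepend the hidden bias unit
    z3 = a2 @ t2.T                             # (5000, 10)
    h = sigmoid(z3)                            # (5000, 10), one probability per class
    return a1, z2, a2, z3, h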
Think about this: we now have $y$ and $h_{\theta} \in \mathbb{R}^{5000 \times 10}$.
If you ignore the $m$ and $K$ dimensions for a moment, the computation is trivially element-wise:
$-y \cdot \log(h_{\theta}) - (1-y) \cdot \log(1-h_{\theta})$
All you need to do after that element-wise computation is sum this 2-D array up and divide by $m$.
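That recipe translates almost line for line into code; a sketch of the unregularized cost, built on the helpers above:

def cost(theta, X, y):
    m = X.shape[0]
    _, _, _, _, h = feed_forward(theta, X)
    # element-wise cross-entropy over the two (5000, 10) arrays
    pair_computation = -y * np.log(h) - (1 - y) * np.log(1 - h)
    # sum over both dimensions, then average over the m examples
    return pair_computation.sum() / m

For the provided weights, the original exercise handout quotes a cost of about 0.287629.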
In [8]:
nn.cost(theta, X, y)
Out[8]:
The first column of t1 and t2 holds the intercept parameters $\theta_0$; just leave them out when you compute the regularization term.
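A sketch of the regularized cost, with the regularization parameter l ($\lambda$) defaulting to 1 as in the exercise; the [:, 1:] slices are what skip the intercept column:

def regularized_cost(theta, X, y, l=1):
    t1, t2 = deserialize(theta)
    m = X.shape[0]
    # penalize every weight except the first (bias) column of each layer
    reg_t1 = (l / (2 * m)) * np.power(t1[:, 1:], 2).sum()
    reg_t2 = (l / (2 * m)) * np.power(t2[:, 1:], 2).sum()
    return cost(theta, X, y) + reg_t1 + reg_t2

With $\lambda = 1$ and the provided weights, the handout quotes roughly 0.383770.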
In [9]:
nn.regularized_cost(theta, X, y)
Out[9]: